In [103]:
import numpy as np
import pandas as pd

from sklearn import preprocessing
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.linear_model import LassoLarsCV
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.pipeline import make_pipeline
from sklearn.utils import check_array

In [140]:
class StackingEstimator(BaseEstimator, TransformerMixin):
    """Wrap a regressor so that its predictions are prepended to the
    feature matrix, letting the next pipeline step learn from them."""

    def __init__(self, estimator):
        self.estimator = estimator

    def fit(self, X, y=None, **fit_params):
        self.estimator.fit(X, y, **fit_params)
        return self

    def transform(self, X):
        X = check_array(X)
        X_transformed = np.copy(X)
        # Prepend the wrapped estimator's predictions as the first column.
        X_transformed = np.hstack((np.reshape(self.estimator.predict(X), (-1, 1)), X_transformed))
        # Debug output: shape and contents of the stacked feature matrix.
        print(X_transformed.shape)
        print(X_transformed)
        return X_transformed
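A minimal sketch (toy data, not from the competition set) of what transform returns: the wrapped estimator's predictions become column 0 and the original features follow.

import numpy as np
from sklearn.linear_model import LinearRegression

X_toy = np.array([[1.0, 2.0], [2.0, 4.0], [3.0, 6.0]])
y_toy = np.array([1.0, 2.0, 3.0])

toy_stacker = StackingEstimator(estimator=LinearRegression())
toy_stacker.fit(X_toy, y_toy)
X_toy_stacked = toy_stacker.transform(X_toy)  # shape (3, 3): predictions plus the 2 original columns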

In [141]:
train = pd.read_csv("data/train.csv", index_col="ID")
test = pd.read_csv("data/test.csv", index_col="ID")

In [142]:
# Encode categorical (object) columns; fit each LabelEncoder on the union of
# train and test values so both splits share the same integer mapping.
for c in train.columns:
    if train[c].dtype == "object":
        lbl = preprocessing.LabelEncoder()
        lbl.fit(list(train[c].values) + list(test[c].values))
        train[c] = lbl.transform(list(train[c].values))
        test[c] = lbl.transform(list(test[c].values))

y_train = train["y"].values
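A small illustration (made-up labels, not from the dataset) of why each LabelEncoder is fit on the concatenated train and test values: a category that appears only in test would otherwise fail at transform time.

toy_encoder = preprocessing.LabelEncoder()
toy_encoder.fit(["a", "b", "c", "d"])      # "d" stands in for a test-only category
print(toy_encoder.transform(["a", "d"]))   # -> [0 3]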

Stacked Pipeline - Automatic


In [144]:
stacked_pipeline = make_pipeline(
    StackingEstimator(estimator=LassoLarsCV(normalize=True)),
    StackingEstimator(estimator=GradientBoostingRegressor(
        learning_rate=0.001, loss="huber", max_depth=3, max_features=0.55,
        min_samples_leaf=18, min_samples_split=14, subsample=0.7,
        random_state=1)),
    LassoLarsCV()
)
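make_pipeline names the steps automatically ("stackingestimator-1", "stackingestimator-2", as seen in the fitted repr below). For reference, an explicitly named but otherwise equivalent version would look like this sketch (step names are made up here):

from sklearn.pipeline import Pipeline

stacked_pipeline_named = Pipeline([
    ("stack_lasso", StackingEstimator(estimator=LassoLarsCV(normalize=True))),
    ("stack_gbr", StackingEstimator(estimator=GradientBoostingRegressor(
        learning_rate=0.001, loss="huber", max_depth=3, max_features=0.55,
        min_samples_leaf=18, min_samples_split=14, subsample=0.7,
        random_state=1))),
    ("final_lasso", LassoLarsCV()),
])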

In [145]:
# Fit the whole stack on the training features.
stacked_pipeline.fit(train.drop("y", axis=1), y_train)


/Users/datitran/anaconda/envs/kaggle/lib/python3.5/site-packages/sklearn/linear_model/least_angle.py:309: ConvergenceWarning: Regressors in active set degenerate. Dropping a regressor, after 2 iterations, i.e. alpha=6.025e-02, with an active set of 2 regressors, and the smallest cholesky pivot element being 2.220e-16
  ConvergenceWarning)
[... many similar ConvergenceWarning/RuntimeWarning messages from the LassoLarsCV steps omitted ...]
(4209, 377)
[[ 104.48433317   37.           23.         ...,    0.            0.            0.        ]
 [  96.12049845   37.           21.         ...,    0.            0.            0.        ]
 [  79.98099226   24.           24.         ...,    0.            0.            0.        ]
 ..., 
 [ 109.14565988   10.           23.         ...,    0.            0.            0.        ]
 [  92.93850915   11.           19.         ...,    0.            0.            0.        ]
 [  95.75150277   52.           19.         ...,    0.            0.            0.        ]]
(4209, 378)
[[  99.88477548  104.48433317   37.         ...,    0.            0.            0.        ]
 [  98.540056     96.12049845   37.         ...,    0.            0.            0.        ]
 [  97.11279097   79.98099226   24.         ...,    0.            0.            0.        ]
 ..., 
 [ 100.37431896  109.14565988   10.         ...,    0.            0.            0.        ]
 [  98.60704572   92.93850915   11.         ...,    0.            0.            0.        ]
 [  98.63825961   95.75150277   52.         ...,    0.            0.            0.        ]]
[... further repeated ConvergenceWarning messages from the final LassoLarsCV step omitted ...]
Out[145]:
Pipeline(steps=[('stackingestimator-1', StackingEstimator(estimator=LassoLarsCV(copy_X=True, cv=None, eps=2.2204460492503131e-16,
      fit_intercept=True, max_iter=500, max_n_alphas=1000, n_jobs=1,
      normalize=True, positive=False, precompute='auto', verbose=False))), ('stackingestimator-2', StackingEst...x_n_alphas=1000, n_jobs=1,
      normalize=True, positive=False, precompute='auto', verbose=False))])

In [154]:
# Predict on the training features (in-sample); the StackingEstimator steps
# print their intermediate matrices again during the transform calls.
results = stacked_pipeline.predict(train.drop("y", axis=1))


(4209, 377)
[[ 104.48433317   37.           23.         ...,    0.            0.            0.        ]
 [  96.12049845   37.           21.         ...,    0.            0.            0.        ]
 [  79.98099226   24.           24.         ...,    0.            0.            0.        ]
 ..., 
 [ 109.14565988   10.           23.         ...,    0.            0.            0.        ]
 [  92.93850915   11.           19.         ...,    0.            0.            0.        ]
 [  95.75150277   52.           19.         ...,    0.            0.            0.        ]]
(4209, 378)
[[  99.88477548  104.48433317   37.         ...,    0.            0.            0.        ]
 [  98.540056     96.12049845   37.         ...,    0.            0.            0.        ]
 [  97.11279097   79.98099226   24.         ...,    0.            0.            0.        ]
 ..., 
 [ 100.37431896  109.14565988   10.         ...,    0.            0.            0.        ]
 [  98.60704572   92.93850915   11.         ...,    0.            0.            0.        ]
 [  98.63825961   95.75150277   52.         ...,    0.            0.            0.        ]]

In [155]:
results


Out[155]:
array([ 107.02741057,   93.58506525,   79.25021499, ...,  111.93607051,
         94.22134843,   94.55766098])
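Since results are predictions on the training rows themselves, a quick in-sample check can be made with r2_score (a sketch, not part of the original notebook; this is an optimistic estimate because no hold-out set is used):

from sklearn.metrics import r2_score

print(r2_score(y_train, results))  # in-sample R^2 of the stacked pipeline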

Stacked Pipeline - Manual


In [132]:
X = train.drop("y", axis=1)
# Step 1: fit the first-level LassoLarsCV on the raw features.
estimator = LassoLarsCV(normalize=True)
estimator.fit(X, y_train)


[... the same LassoLarsCV ConvergenceWarning/RuntimeWarning output as during the pipeline fit above, omitted ...]
Out[132]:
LassoLarsCV(copy_X=True, cv=None, eps=2.2204460492503131e-16,
      fit_intercept=True, max_iter=500, max_n_alphas=1000, n_jobs=1,
      normalize=True, positive=False, precompute='auto', verbose=False)

In [148]:
X = check_array(X)

In [149]:
X = check_array(X)
X_transformed = np.copy(X)
# Prepend the first-level predictions as a new leading column.
X_transformed = np.hstack((np.reshape(estimator.predict(X), (-1, 1)), X_transformed))

In [150]:
X_transformed


Out[150]:
array([[ 104.48433317,   37.        ,   23.        , ...,    0.        ,
           0.        ,    0.        ],
       [  96.12049845,   37.        ,   21.        , ...,    0.        ,
           0.        ,    0.        ],
       [  79.98099226,   24.        ,   24.        , ...,    0.        ,
           0.        ,    0.        ],
       ..., 
       [ 109.14565988,   10.        ,   23.        , ...,    0.        ,
           0.        ,    0.        ],
       [  92.93850915,   11.        ,   19.        , ...,    0.        ,
           0.        ,    0.        ],
       [  95.75150277,   52.        ,   19.        , ...,    0.        ,
           0.        ,    0.        ]])

In [151]:
X_transformed.shape


Out[151]:
(4209, 377)

In [152]:
# Step 2: fit the GradientBoostingRegressor on the augmented features and
# prepend its predictions as well.
estimator_2 = GradientBoostingRegressor(
    learning_rate=0.001, loss="huber", max_depth=3, max_features=0.55,
    min_samples_leaf=18, min_samples_split=14, subsample=0.7, random_state=1)
estimator_2.fit(X_transformed, y_train)
X_transformed2 = np.copy(X_transformed)
X_transformed2 = np.hstack((np.reshape(estimator_2.predict(X_transformed), (-1, 1)), X_transformed2))

In [153]:
X_transformed2


Out[153]:
array([[  99.88477548,  104.48433317,   37.        , ...,    0.        ,
           0.        ,    0.        ],
       [  98.540056  ,   96.12049845,   37.        , ...,    0.        ,
           0.        ,    0.        ],
       [  97.11279097,   79.98099226,   24.        , ...,    0.        ,
           0.        ,    0.        ],
       ..., 
       [ 100.37431896,  109.14565988,   10.        , ...,    0.        ,
           0.        ,    0.        ],
       [  98.60704572,   92.93850915,   11.        , ...,    0.        ,
           0.        ,    0.        ],
       [  98.63825961,   95.75150277,   52.        , ...,    0.        ,
           0.        ,    0.        ]])

In [137]:
X_transformed2.shape


Out[137]:
(4209, 378)

In [156]:
# Step 3: fit the final LassoLarsCV on the twice-augmented feature matrix.
estimator_3 = LassoLarsCV()
estimator_3.fit(X_transformed2, y_train)
estimator_3.predict(X_transformed2)


/Users/datitran/anaconda/envs/kaggle/lib/python3.5/site-packages/sklearn/linear_model/least_angle.py:309: ConvergenceWarning: Regressors in active set degenerate. Dropping a regressor, after 4 iterations, i.e. alpha=8.423e-03, with an active set of 4 regressors, and the smallest cholesky pivot element being 2.107e-08
  ConvergenceWarning)
/Users/datitran/anaconda/envs/kaggle/lib/python3.5/site-packages/sklearn/linear_model/least_angle.py:377: RuntimeWarning: overflow encountered in true_divide
  g1 = arrayfuncs.min_pos((C - Cov) / (AA - corr_eq_dir + tiny))
/Users/datitran/anaconda/envs/kaggle/lib/python3.5/site-packages/sklearn/linear_model/least_angle.py:334: ConvergenceWarning: Early stopping the lars path, as the residues are small and the current value of alpha is no longer well controlled. 44 iterations, alpha=3.239e-03, previous alpha=3.205e-03, with an active set of 43 regressors.
  ConvergenceWarning)
[... many similar ConvergenceWarning/RuntimeWarning lines from least_angle.py omitted for brevity; the fit completes despite them ...]
Out[156]:
array([ 107.02741057,   93.58506525,   79.25021499, ...,  111.93607051,
         94.22134843,   94.55766098])
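
As a quick sanity check, the manually chained estimators can be compared against the automatic pipeline's predictions on the same training features. This is only a sketch: it assumes the hand-built X_transformed2 and estimator_3 mirror the pipeline's stages, and manual_preds / pipeline_preds are illustrative variable names, not part of the original notebook.

In [ ]:
manual_preds = estimator_3.predict(X_transformed2)
pipeline_preds = stacked_pipeline.predict(train.drop("y", axis=1))

# The two vectors should agree if the manual chain reproduces the automatic
# pipeline; tiny numerical differences may remain because each fit re-runs
# its own cross-validation.
print(np.allclose(manual_preds, pipeline_preds))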

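The ConvergenceWarning and RuntimeWarning chatter above comes from LassoLarsCV while it cross-validates the LARS path; it does not abort the fit. If it is distracting, the messages can be silenced around the fit call. A minimal sketch, assuming a scikit-learn version where ConvergenceWarning is importable from sklearn.exceptions:

In [ ]:
import warnings

from sklearn.exceptions import ConvergenceWarning

# Silence the LARS convergence chatter (and the true_divide overflow
# RuntimeWarnings) only for the duration of this fit.
with warnings.catch_warnings():
    warnings.simplefilter("ignore", category=ConvergenceWarning)
    warnings.simplefilter("ignore", category=RuntimeWarning)
    stacked_pipeline.fit(train.drop("y", axis=1), y_train)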